Let’s Build a Chatbot

Using GitHub Copilot, ChatGPT, & more in your favorite IDE

James Wade

Creating OpenAI API Calls

An Example from OpenAI Documentation

curl https://api.openai.com/v1/chat/completions \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $OPENAI_API_KEY" \
-d '{
"model": "gpt-3.5-turbo",
"messages": [{"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "Hello!"}]
}'

Constructing Messages for OpenAI

The message body:

{
  "model": "gpt-3.5-turbo",
  "messages": [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"}
  ]
}

Send requests with {httr2}

library(httr2)
library(purrr)

Send requests with {httr2}

library(httr2)
library(purrr)

# construct the message body
user_message <- list(list(role = "user", content = "Hello"))
body <- list(model = "gpt-3.5-turbo", messages = user_message)
api_key <- Sys.getenv("OPENAI_API_KEY")

Send requests with {httr2}

library(httr2)
library(purrr)

# construct the message body
user_message <- list(list(role = "user", content = "Hello!"))
body <- list(model = "gpt-3.5-turbo", messages = user_message)
api_key <- Sys.getenv("OPENAI_API_KEY")

# send the request
resp <-
  request("https://api.openai.com/v1") |> 
  req_url_path_append("chat/completions") |> 
  req_auth_bearer_token(token = api_key) |> 
  req_body_json(body) |> 
  req_perform()

Send requests with {httr2}

# construct the message body
user_message <- list(list(role = "user", content = "Hello!"))
body <- list(model = "gpt-3.5-turbo", messages = user_message)
api_key <- Sys.getenv("OPENAI_API_KEY")

# send the request
resp <-
  request("https://api.openai.com/v1") |>
  req_url_path_append("chat/completions") |> 
  req_auth_bearer_token(token = api_key) |> 
  req_body_json(body) |> 
  req_perform()

# process the response
resp |>
  resp_body_json(simplifyVector = TRUE) |> 
  pluck("choices", "message", "content")
[1] "Hi there! How can I assist you today?"

Examining the Response

resp |> 
  resp_body_json(simplifyVector = TRUE)
$id
[1] "chatcmpl-8ssMI3rY8HBUdZxc20zEMZIdZCAHV"

$object
[1] "chat.completion"

$created
[1] 1708089358

$model
[1] "gpt-3.5-turbo-0613"

$choices
  index message.role                       message.content logprobs
1     0    assistant Hi there! How can I assist you today?       NA
  finish_reason
1          stop

$usage
$usage$prompt_tokens
[1] 9

$usage$completion_tokens
[1] 10

$usage$total_tokens
[1] 19


$system_fingerprint
NULL

Wrapping it in a function

library(httr2)
library(purrr)

chat <- function(message, api_key = Sys.getenv("OPENAI_API_KEY")) {
  # Build the chat-completions payload: one user turn for gpt-3.5-turbo.
  messages <- list(list(role = "user", content = message))
  payload  <- list(model = "gpt-3.5-turbo", messages = messages)

  # POST the payload to the OpenAI chat completions endpoint,
  # authenticating with the bearer token from OPENAI_API_KEY.
  response <-
    request("https://api.openai.com/v1") |> 
    req_url_path_append("chat/completions") |> 
    req_auth_bearer_token(token = api_key) |> 
    req_body_json(payload) |> 
    req_perform()

  # Parse the JSON reply and return only the assistant's message text.
  parsed <- resp_body_json(response, simplifyVector = TRUE)
  pluck(parsed, "choices", "message", "content")
}

Trying out chat()

chat("What is your favorite color?")
[1] "As an AI, I don't have personal preferences or emotions, so I don't have a favorite color."

chat("Show me a simple ggplot2 example. Only code with comments. Be brief.")
[1] "```{r}\n# Load the ggplot2 library\nlibrary(ggplot2)\n\n# Create a dataset\ndata <- data.frame(x = c(1, 2, 3, 4, 5), y = c(2, 4, 6, 8, 10))\n\n# Create a scatter plot using ggplot2\nggplot(data, aes(x = x, y = y)) +\n  geom_point()  # Add points to the plot\n```"

A Prettier Response

answer <- chat("Make a ggplot2 in an RMarkdown document and briefly tell me
               what you made.")
answer |> cat()
Sure! Here is an example of creating a ggplot2 in an RMarkdown document using the built-in `mtcars` dataset.

```{r echo=FALSE, message=FALSE, warning=FALSE}
# Load Required Libraries
library(ggplot2)

# Create the Plot
ggplot(data = mtcars) +
  geom_point(aes(x = mpg, y = disp, color = gear)) +
  labs(title = "MPG vs. Displacement", x = "Miles Per Gallon", y = "Engine Displacement", color = "Gears")
```

In this example, I created a scatter plot using `ggplot2` library to visualize the relationship between Miles Per Gallon (mpg) and Engine Displacement for different types of gears (3, 4, and 5). Each data point is represented by a dot on the plot, where the x-axis represents the mpg and the y-axis represents the displacement of the engine. The color of the dots represents the number of gears in each car. The plot is titled "MPG vs. Displacement" and the x-axis and y-axis are labeled accordingly. The legend shows the color mapping for the number of gears.

An Even Prettier Response

answer |> shiny::markdown()

Sure! Here is an example of creating a ggplot2 in an RMarkdown document using the built-in mtcars dataset.

# Load Required Libraries
library(ggplot2)

# Create the Plot
ggplot(data = mtcars) +
  geom_point(aes(x = mpg, y = disp, color = gear)) +
  labs(title = "MPG vs. Displacement", x = "Miles Per Gallon", y = "Engine Displacement", color = "Gears")

In this example, I created a scatter plot using ggplot2 library to visualize the relationship between Miles Per Gallon (mpg) and Engine Displacement for different types of gears (3, 4, and 5). Each data point is represented by a dot on the plot, where the x-axis represents the mpg and the y-axis represents the displacement of the engine. The color of the dots represents the number of gears in each car. The plot is titled "MPG vs. Displacement" and the x-axis and y-axis are labeled accordingly. The legend shows the color mapping for the number of gears.

Some Helper Functions

chat()

chat <- function(user_message, 
                 history = NULL,
                 system_prompt = c("general", "code"),
                 api_key = Sys.getenv("OPENAI_API_KEY")) {
  # Resolve the system-prompt choice into an actual system message, then
  # assemble the full message list: system message + prior turns + new user turn.
  system   <- get_system_prompt(system_prompt)
  # Pass the built system message (`system`), not the raw choice vector —
  # otherwise the computed prompt is discarded and the API body would carry
  # the choice strings c("general", "code") instead of a system message.
  prompt   <- prepare_prompt(user_message, system, history)
  base_url <- "https://api.openai.com/v1"
  body     <- list(model = "gpt-3.5-turbo",
                   messages = prompt)
  
  # <httr2_request_pipeline>
  # <process_response>
}

Helper Functions

get_system_prompt()

#' Build the system message for a chat request
#'
#' @param system Which canned prompt to use: "general" (default) or "code".
#' @return A one-element list holding a `{role, content}` message, the shape
#'   the OpenAI chat API expects for `messages`.
get_system_prompt <- function(system = c("general", "code")) {
  # match.arg() validates the choice and selects the first option by default;
  # without it, the length-2 default vector makes switch() error (it
  # requires a length-1 EXPR).
  system <- match.arg(system)
  instructions <- 
    switch(system,
           "general" = "You are a helpful assistant.",
           "code"    = "<code_assistant_prompt>")
  list(list(role = "system", content = instructions))
}


prepare_prompt()

#' Assemble the full message list for a chat request
#'
#' @param user_message The new user turn, as a plain string.
#' @param system_prompt System message list (e.g. from `get_system_prompt()`).
#' @param history Prior conversation turns, or NULL for a fresh chat.
#' @return Messages in order: system, history, then the new user turn.
prepare_prompt <- function(user_message, system_prompt, history) {
  new_turn <- list(list(role = "user", content = user_message))
  messages <- c(system_prompt, history, new_turn)
  # compact() drops empty elements (e.g. when history is NULL).
  compact(messages)
}